# Source Generated with Decompyle++
# File: in.pyo (Python 2.5)

from __future__ import with_statement

__author__ = 'Joe Gregorio (joe@bitworking.org)'
__copyright__ = 'Copyright 2006, Joe Gregorio'
__contributors__ = [
    'Thomas Broyer (t.broyer@ltgt.net)',
    'James Antill',
    'Xavier Verges Farrero',
    'Jonathan Feinberg',
    'Blair Zajac',
    'Sam Ruby',
    'Louis Nyffenegger']
__license__ = 'MIT'
__version__ = 'digsby'

import re
import sys
import email
import email.Utils
import email.Message
import StringIO
import gzip
import zlib
import httplib
import urlparse
import base64
import os
import copy
import calendar
import time
import random
import hmac
from hashlib import md5, sha1
from gettext import gettext as _
import socket

from logging import getLogger
log = getLogger('httplib2')

try:
    import socks
except ImportError:
    socks = None

if sys.version_info >= (2, 3):
    from iri2uri import iri2uri
else:
    def iri2uri(uri):
        return uri

__all__ = [
    'Http', 'Response', 'ProxyInfo', 'HttpLib2Error',
    'RedirectMissingLocation', 'RedirectLimit', 'FailedToDecompressContent',
    'UnimplementedDigestAuthOptionError', 'UnimplementedHmacDigestAuthOptionError',
    'debuglevel', 'cached_content', 'set_default_proxy', 'get_default_proxy']

debuglevel = 0

if sys.version_info < (2, 4):
    def sorted(seq):
        seq.sort()
        return seq

def HTTPResponse__getheaders(self):
    """Return a list of (header, value) tuples."""
    if self.msg is None:
        raise httplib.ResponseNotReady()
    return self.msg.items()

if not hasattr(httplib.HTTPResponse, 'getheaders'):
    httplib.HTTPResponse.getheaders = HTTPResponse__getheaders

class HttpLib2Error(Exception):
    pass

class HttpLib2ErrorWithResponse(HttpLib2Error):
    def __init__(self, desc, response, content):
        self.response = response
        self.content = content
        HttpLib2Error.__init__(self, desc)

class RedirectMissingLocation(HttpLib2ErrorWithResponse):
    pass

class RedirectLimit(HttpLib2ErrorWithResponse):
    pass

class FailedToDecompressContent(HttpLib2ErrorWithResponse):
    pass

class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse):
    pass

class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse):
    pass

class RelativeURIError(HttpLib2Error):
    pass

class ServerNotFoundError(HttpLib2Error):
    pass

DEFAULT_MAX_REDIRECTS = 5

# Hop-by-hop headers must not be forwarded or cached (RFC 2616, section 13.5.1).
HOP_BY_HOP = [
    'connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization',
    'te', 'trailers', 'transfer-encoding', 'upgrade']

def set_default_proxy(proxy_info):
    ProxyInfo.default_proxy_info = proxy_info

def get_default_proxy():
    return ProxyInfo.default_proxy_info

def _get_end2end_headers(response):
    hopbyhop = list(HOP_BY_HOP)
    hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')])
    return [header for header in response.keys() if header not in hopbyhop]

URI = re.compile('^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\\?([^#]*))?(#(.*))?')

def parse_uri(uri):
    """Parse a URI into (scheme, authority, path, query, fragment)."""
    groups = URI.match(uri).groups()
    return (groups[1], groups[3], groups[4], groups[6], groups[8])
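# Illustrative sketch (not part of the original module): what parse_uri returns
# for a typical absolute URI. The sample URL is invented for demonstration.
def _example_parse_uri():
    scheme, authority, path, query, fragment = parse_uri('http://example.com/a/b?x=1#top')
    # scheme == 'http', authority == 'example.com', path == '/a/b',
    # query == 'x=1', fragment == 'top'
    return (scheme, authority, path, query, fragment)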
def urlnorm(uri):
    (scheme, authority, path, query, fragment) = parse_uri(uri)
    if not scheme or not authority:
        raise RelativeURIError('Only absolute URIs are allowed. uri = %s' % uri)
    authority = authority.lower()
    scheme = scheme.lower()
    if not path:
        path = '/'
    request_uri = query and '?'.join([path, query]) or path
    scheme = scheme.lower()
    defrag_uri = scheme + '://' + authority + request_uri
    return (scheme, authority, request_uri, defrag_uri)

re_url_scheme = re.compile('^\\w+://')
re_slash = re.compile('[?/:|]+')

def safename(filename):
    """Return a filename suitable for the cache.

    Strips dangerous and high characters and appends an md5 of the
    original name so distinct URIs cannot collide.
    """
    try:
        if re_url_scheme.match(filename):
            if isinstance(filename, str):
                filename = filename.decode('utf-8')
                filename = filename.encode('idna')
            else:
                filename = filename.encode('idna')
    except UnicodeError:
        pass
    if isinstance(filename, unicode):
        filename = filename.encode('utf-8')
    filemd5 = md5(filename).hexdigest()
    filename = re_url_scheme.sub('', filename)
    filename = re_slash.sub(',', filename)
    # Truncate long names; the md5 suffix keeps them unique.
    if len(filename) > 200:
        filename = filename[:200]
    return ','.join((filename, filemd5))

NORMALIZE_SPACE = re.compile('(?:\\r\\n)?[ \\t]+')

def _normalize_headers(headers):
    return dict([(key.lower(), NORMALIZE_SPACE.sub(value, ' ').strip())
                 for key, value in headers.iteritems()])

def _parse_cache_control(headers):
    retval = {}
    if headers.has_key('cache-control'):
        parts = headers['cache-control'].split(',')
        parts_with_args = [tuple([x.strip().lower() for x in part.split('=', 1)])
                           for part in parts if -1 != part.find('=')]
        parts_wo_args = [(name.strip().lower(), 1)
                         for name in parts if -1 == name.find('=')]
        retval = dict(parts_with_args + parts_wo_args)
    return retval

# Whether to use a strict mode to parse WWW-Authenticate headers.
USE_WWW_AUTH_STRICT_PARSING = 0

WWW_AUTH_STRICT = re.compile('^(?:\\s*(?:,\\s*)?([^\\0-\\x1f\\x7f-\\xff()<>@,;:\\\\\\"/[\\]?={} \\t]+)\\s*=\\s*\\"?((?<=\\")(?:[^\\0-\\x08\\x0A-\\x1f\\x7f-\\xff\\\\\\"]|\\\\[\\0-\\x7f])*?(?=\\")|(?<!\\")[^\\0-\\x1f\\x7f-\\xff()<>@,;:\\\\\\"/[\\]?={} \\t]+(?!\\"))\\"?)(.*)$')
WWW_AUTH_RELAXED = re.compile('^(?:\\s*(?:,\\s*)?([^ \\t\\r\\n=]+)\\s*=\\s*\\"?((?<=\\")(?:[^\\\\\\"]|\\\\.)*?(?=\\")|(?<!\\")[^ \\t\\r\\n,]+(?!\\"))\\"?)(.*)$')
UNQUOTE_PAIRS = re.compile('\\\\(.)')

def _parse_www_authenticate(headers, headername='www-authenticate'):
    """Return a dictionary of dictionaries, one dict per auth scheme."""
    retval = {}
    if headers.has_key(headername):
        authenticate = headers[headername].strip()
        www_auth = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
        while authenticate:
            # Break off the scheme at the beginning of the line.
            if headername == 'authentication-info':
                (auth_scheme, the_rest) = ('digest', authenticate)
            else:
                (auth_scheme, the_rest) = authenticate.split(' ', 1)
            # Now loop over the key="value" pairs.
            match = www_auth.search(the_rest)
            auth_params = {}
            while match:
                if match and len(match.groups()) == 3:
                    (key, value, the_rest) = match.groups()
                    auth_params[key.lower()] = UNQUOTE_PAIRS.sub('\\1', value)
                match = www_auth.search(the_rest)
            retval[auth_scheme.lower()] = auth_params
            authenticate = the_rest.strip()
    return retval
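# Illustrative sketch (not part of the original module): how a WWW-Authenticate
# header is broken into per-scheme parameter dictionaries. The header value is
# invented for the example.
def _example_parse_www_authenticate():
    headers = {'www-authenticate':
               'Digest realm="me@example.com", nonce="abc123", qop="auth"'}
    challenge = _parse_www_authenticate(headers, 'www-authenticate')
    # challenge == {'digest': {'realm': 'me@example.com',
    #                          'nonce': 'abc123', 'qop': 'auth'}}
    return challenge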
def _entry_disposition(response_headers, request_headers):
    """Determine freshness from the request and response headers.

    Returns 'FRESH', 'STALE', or 'TRANSPARENT' (the latter meaning the cache
    must be bypassed entirely).
    """
    retval = 'STALE'
    cc = _parse_cache_control(request_headers)
    cc_response = _parse_cache_control(response_headers)

    if request_headers.has_key('pragma') and request_headers['pragma'].lower().find('no-cache') != -1:
        retval = 'TRANSPARENT'
        if 'cache-control' not in request_headers:
            request_headers['cache-control'] = 'no-cache'
    elif cc.has_key('no-cache'):
        retval = 'TRANSPARENT'
    elif cc_response.has_key('no-cache'):
        retval = 'STALE'
    elif cc.has_key('only-if-cached'):
        retval = 'FRESH'
    elif response_headers.has_key('date'):
        date = calendar.timegm(email.Utils.parsedate_tz(response_headers['date']))
        now = time.time()
        current_age = max(0, now - date)
        if cc_response.has_key('max-age'):
            try:
                freshness_lifetime = int(cc_response['max-age'])
            except ValueError:
                freshness_lifetime = 0
        elif response_headers.has_key('expires'):
            expires = email.Utils.parsedate_tz(response_headers['expires'])
            if None == expires:
                freshness_lifetime = 0
            else:
                freshness_lifetime = max(0, calendar.timegm(expires) - date)
        else:
            freshness_lifetime = 0
        if cc.has_key('max-age'):
            try:
                freshness_lifetime = int(cc['max-age'])
            except ValueError:
                freshness_lifetime = 0
        if cc.has_key('min-fresh'):
            try:
                min_fresh = int(cc['min-fresh'])
            except ValueError:
                min_fresh = 0
            current_age += min_fresh
        if freshness_lifetime > current_age:
            retval = 'FRESH'
    return retval

def _decompressContent(response, new_content):
    content = new_content
    try:
        encoding = response.get('content-encoding', None)
        if encoding in ('gzip', 'deflate'):
            if encoding == 'gzip':
                content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read()
            if encoding == 'deflate':
                content = zlib.decompress(content)
            response['content-length'] = str(len(content))
            response['-content-encoding'] = response['content-encoding']
            del response['content-encoding']
    except IOError:
        content = ''
        raise FailedToDecompressContent(
            _('Content purported to be compressed with %s but failed to decompress.')
            % response.get('content-encoding'), response, content)
    return content

def _updateCache(request_headers, response_headers, content, cache, cachekey):
    if cachekey:
        cc = _parse_cache_control(request_headers)
        cc_response = _parse_cache_control(response_headers)
        if cc.has_key('no-store') or cc_response.has_key('no-store'):
            cache.delete(cachekey)
        else:
            info = email.Message.Message()
            for key, value in response_headers.iteritems():
                if key not in ('status', 'content-encoding', 'transfer-encoding'):
                    info[key] = value

            status = response_headers.status
            if status == 304:
                status = 200
            status_header = 'status: %d\r\n' % response_headers.status
            header_str = info.as_string()
            header_str = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', header_str)
            text = ''.join([status_header, header_str, content])
            cache.set(cachekey, text, content)

def _cnonce():
    dig = md5('%s:%s' % (time.ctime(),
                         ['0123456789'[random.randrange(0, 9)] for i in range(20)])).hexdigest()
    return dig[:16]

def _wsse_username_token(cnonce, iso_now, password):
    return base64.encodestring(sha1('%s%s%s' % (cnonce, iso_now, password)).digest()).strip()

class Authentication(object):
    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
        self.path = path
        self.host = host
        self.credentials = credentials
        self.http = http

    def depth(self, request_uri):
        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
        return request_uri[len(self.path):].count('/')

    def inscope(self, host, request_uri):
        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
        return (host == self.host) and path.startswith(self.path)

    def request(self, method, request_uri, headers, content):
        """Modify the request headers to add the appropriate Authorization header."""
        pass

    def response(self, response, content):
        """Gives us a chance to update with new nonces or such returned from the
        last authorized response. Returns True if the request should be retried."""
        return False

class BasicAuthentication(Authentication):
    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)

    def request(self, method, request_uri, headers, content):
        """Modify the request headers to add the appropriate Authorization header."""
        headers['authorization'] = 'Basic ' + base64.encodestring('%s:%s' % self.credentials).strip()
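# Illustrative sketch (not part of the original module): the Authorization value
# BasicAuthentication produces for a made-up credential pair.
def _example_basic_auth_header():
    credentials = ('alice', 'secret')
    value = 'Basic ' + base64.encodestring('%s:%s' % credentials).strip()
    # value == 'Basic YWxpY2U6c2VjcmV0'
    return value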
class DigestAuthentication(Authentication):
    """Only handles qop='auth' and algorithm=MD5."""

    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
        challenge = _parse_www_authenticate(response, 'www-authenticate')
        self.challenge = challenge['digest']
        qop = self.challenge.get('qop', 'auth')
        self.challenge['qop'] = ('auth' in [x.strip() for x in qop.split()]) and 'auth' or None
        self.challenge['algorithm'] = self.challenge.get('algorithm', 'MD5')
        if self.challenge['algorithm'] != 'MD5':
            raise UnimplementedDigestAuthOptionError(
                _('Unsupported value for algorithm: %s.' % self.challenge['algorithm']))
        self.A1 = ''.join([self.credentials[0], ':', self.challenge['realm'], ':', self.credentials[1]])
        self.challenge['nc'] = 1

    def request(self, method, request_uri, headers, content, cnonce=None):
        """Modify the request headers to add the Digest Authorization header."""
        H = lambda x: md5(x).hexdigest()
        KD = lambda s, d: H('%s:%s' % (s, d))
        A2 = ''.join([method, ':', request_uri])
        self.challenge['cnonce'] = cnonce or _cnonce()
        request_digest = '"%s"' % KD(H(self.A1), '%s:%s:%s:%s:%s' % (
            self.challenge['nonce'],
            '%08x' % self.challenge['nc'],
            self.challenge['cnonce'],
            self.challenge['qop'],
            H(A2)))
        headers['Authorization'] = 'Digest username="%s", realm="%s", nonce="%s", uri="%s", algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"' % (
            self.credentials[0],
            self.challenge['realm'],
            self.challenge['nonce'],
            request_uri,
            self.challenge['algorithm'],
            request_digest,
            self.challenge['qop'],
            self.challenge['nc'],
            self.challenge['cnonce'])
        self.challenge['nc'] += 1

    def response(self, response, content):
        if not response.has_key('authentication-info'):
            challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {})
            if 'true' == challenge.get('stale'):
                self.challenge['nonce'] = challenge['nonce']
                self.challenge['nc'] = 1
                return True
        else:
            updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {})
            if updated_challenge.has_key('nextnonce'):
                self.challenge['nonce'] = updated_challenge['nextnonce']
                self.challenge['nc'] = 1
        return False
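# Illustrative sketch (not part of the original module): the RFC 2617 quantities
# DigestAuthentication.request() combines. All values here are invented; only the
# shape of the computation is shown.
def _example_digest_response(username='alice', password='secret',
                             realm='me@example.com', nonce='abc123',
                             method='GET', uri='/index.html',
                             cnonce='0a4f113b', nc=1):
    H = lambda x: md5(x).hexdigest()
    KD = lambda s, d: H('%s:%s' % (s, d))
    A1 = '%s:%s:%s' % (username, realm, password)
    A2 = '%s:%s' % (method, uri)
    # response = KD(H(A1), nonce ":" nc ":" cnonce ":" qop ":" H(A2)), with qop == 'auth'
    return KD(H(A1), '%s:%08x:%s:%s:%s' % (nonce, nc, cnonce, 'auth', H(A2)))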
class HmacDigestAuthentication(Authentication):
    """Adapted from Robert Sayre's code and DigestAuthentication above."""
    __author__ = 'Thomas Broyer (t.broyer@ltgt.net)'

    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
        challenge = _parse_www_authenticate(response, 'www-authenticate')
        self.challenge = challenge['hmacdigest']
        self.challenge['reason'] = self.challenge.get('reason', 'unauthorized')
        if self.challenge['reason'] not in ('unauthorized', 'integrity'):
            self.challenge['reason'] = 'unauthorized'
        self.challenge['salt'] = self.challenge.get('salt', '')
        if not self.challenge.get('snonce'):
            raise UnimplementedHmacDigestAuthOptionError(
                _("The challenge doesn't contain a server nonce, or this one is empty."))
        self.challenge['algorithm'] = self.challenge.get('algorithm', 'HMAC-SHA-1')
        if self.challenge['algorithm'] not in ('HMAC-SHA-1', 'HMAC-MD5'):
            raise UnimplementedHmacDigestAuthOptionError(
                _('Unsupported value for algorithm: %s.' % self.challenge['algorithm']))
        self.challenge['pw-algorithm'] = self.challenge.get('pw-algorithm', 'SHA-1')
        if self.challenge['pw-algorithm'] not in ('SHA-1', 'MD5'):
            raise UnimplementedHmacDigestAuthOptionError(
                _('Unsupported value for pw-algorithm: %s.' % self.challenge['pw-algorithm']))
        if self.challenge['algorithm'] == 'HMAC-MD5':
            self.hashmod = md5
        else:
            self.hashmod = sha1
        if self.challenge['pw-algorithm'] == 'MD5':
            self.pwhashmod = md5
        else:
            self.pwhashmod = sha1
        self.key = ''.join([
            self.credentials[0], ':',
            self.pwhashmod(''.join([self.credentials[1], self.challenge['salt']])).hexdigest().lower(),
            ':', self.challenge['realm']])
        self.key = self.pwhashmod(self.key).hexdigest().lower()

    def request(self, method, request_uri, headers, content):
        """Modify the request headers to add the HMACDigest Authorization header."""
        keys = _get_end2end_headers(headers)
        keylist = ''.join(['%s ' % k for k in keys])
        headers_val = ''.join([headers[k] for k in keys])
        created = time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime())
        cnonce = _cnonce()
        request_digest = '%s:%s:%s:%s:%s' % (method, request_uri, cnonce,
                                             self.challenge['snonce'], headers_val)
        request_digest = hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower()
        headers['Authorization'] = 'HMACDigest username="%s", realm="%s", snonce="%s", cnonce="%s", uri="%s", created="%s", response="%s", headers="%s"' % (
            self.credentials[0],
            self.challenge['realm'],
            self.challenge['snonce'],
            cnonce,
            request_uri,
            created,
            request_digest,
            keylist)

    def response(self, response, content):
        challenge = _parse_www_authenticate(response, 'www-authenticate').get('hmacdigest', {})
        if challenge.get('reason') in ('integrity', 'stale'):
            return True
        return False

class WsseAuthentication(Authentication):
    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)

    def request(self, method, request_uri, headers, content):
        """Modify the request headers to add the WSSE Authorization and X-WSSE headers."""
        headers['Authorization'] = 'WSSE profile="UsernameToken"'
        iso_now = time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime())
        cnonce = _cnonce()
        password_digest = _wsse_username_token(cnonce, iso_now, self.credentials[1])
        headers['X-WSSE'] = 'UsernameToken Username="%s", PasswordDigest="%s", Nonce="%s", Created="%s"' % (
            self.credentials[0], password_digest, cnonce, iso_now)

class GoogleLoginAuthentication(Authentication):
    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        from urllib import urlencode
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
        challenge = _parse_www_authenticate(response, 'www-authenticate')
        service = challenge['googlelogin'].get('service', 'xapi')
        # For Calendar the service name must be 'cl'; for the rest we guess from the URI.
        if service == 'xapi' and request_uri.find('calendar') > 0:
            service = 'cl'
        auth = dict(Email=credentials[0], Passwd=credentials[1], service=service,
                    source=headers['user-agent'])
        (resp, content) = self.http.request('https://www.google.com/accounts/ClientLogin',
                                            method='POST',
                                            body=urlencode(auth),
                                            headers={'Content-Type': 'application/x-www-form-urlencoded'})
        lines = content.split('\n')
        d = dict([tuple(line.split('=', 1)) for line in lines if line])
        if resp.status == 403:
            self.Auth = ''
        else:
            self.Auth = d['Auth']

    def request(self, method, request_uri, headers, content):
        """Modify the request headers to add the GoogleLogin Authorization header."""
        headers['authorization'] = 'GoogleLogin Auth=' + self.Auth

AUTH_SCHEME_CLASSES = {
    'basic': BasicAuthentication,
    'wsse': WsseAuthentication,
    'digest': DigestAuthentication,
    'hmacdigest': HmacDigestAuthentication,
    'googlelogin': GoogleLoginAuthentication}

AUTH_SCHEME_ORDER = ['hmacdigest', 'googlelogin', 'digest', 'wsse', 'basic']
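# Illustrative sketch (not part of the original module): the WSSE PasswordDigest
# is base64(sha1(nonce + created + password)); the inputs below are invented.
def _example_wsse_digest():
    iso_now = '2009-01-01T00:00:00Z'
    cnonce = 'd36e316282959a9ed4c89851497a717f'
    return _wsse_username_token(cnonce, iso_now, 'secret')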
from path import path
from threading import RLock

class ThreadsafeFileCache(object):
    def __init__(self, cache):
        self.cache = path(cache)
        self.lock = RLock()
        if not self.cache.exists():
            self.cache.makedirs()

    def get(self, key):
        with self.lock:
            try:
                return (self.cache / safename(key)).bytes()
            except IOError:
                return None

    def getpath(self, key):
        return self.cache / (safename(key) + '.dat')

    def set(self, key, value, content):
        filepath = self.cache / safename(key)
        with self.lock:
            filepath.write_bytes(value)
            self.getpath(key).write_bytes(content)

    def delete(self, key):
        filepath = self.cache / safename(key)
        contentpath = self.getpath(key)
        with self.lock:
            if filepath.exists():
                filepath.remove()
            if contentpath.exists():
                contentpath.remove()

    def __repr__(self):
        return '<ThreadsafeFileCache %s>' % self.cache

class Credentials(object):
    def __init__(self):
        self.credentials = []

    def add(self, name, password, domain=''):
        self.credentials.append((domain.lower(), name, password))

    def clear(self):
        self.credentials = []

    def iter(self, domain):
        for cdomain, name, password in self.credentials:
            if cdomain == '' or domain == cdomain:
                yield (name, password)

class KeyCerts(Credentials):
    """Identical to Credentials, except that name/password are mapped to key/cert."""
    pass

class ProxyInfo(object):
    default_proxy_info = None

    @staticmethod
    def get_default_proxy():
        return ProxyInfo.default_proxy_info

    def __init__(self, proxy_type, proxy_host, proxy_port,
                 proxy_rdns=None, proxy_user=None, proxy_pass=None):
        (self.proxy_type, self.proxy_host, self.proxy_port,
         self.proxy_rdns, self.proxy_user, self.proxy_pass) = (
            proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)

    def astuple(self):
        return (self.proxy_type, self.proxy_host, self.proxy_port,
                self.proxy_rdns, self.proxy_user, self.proxy_pass)

    def isgood(self):
        return socks and (self.proxy_host != None) and (self.proxy_port != None)

class HTTPConnectionWithTimeout(httplib.HTTPConnection):
    def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None):
        httplib.HTTPConnection.__init__(self, host, port, strict)
        self.timeout = timeout
        self.proxy_info = proxy_info or ProxyInfo.get_default_proxy()

    def connect(self):
        """Connect to the host and port specified in __init__."""
        msg = 'getaddrinfo returns an empty list'
        for res in socket.getaddrinfo(self.host, self.port, 0, socket.SOCK_STREAM):
            (af, socktype, proto, canonname, sa) = res
            try:
                if self.proxy_info and self.proxy_info.isgood():
                    if self.proxy_info.proxy_type == socks.PROXY_TYPE_HTTP:
                        # Plain HTTP proxy: talk to the proxy host/port directly.
                        self.sock = socket.socket(af, socktype, proto)
                        self._orig_host = self.host
                        self._orig_port = self.port
                        self.host = self.proxy_info.proxy_host
                        self.port = self.proxy_info.proxy_port
                    else:
                        self.sock = socks.socksocket(af, socktype, proto)
                        self.sock.setproxy(*self.proxy_info.astuple())
                else:
                    self.sock = socket.socket(af, socktype, proto)
                if self.timeout is not None:
                    self.sock.settimeout(self.timeout)
                if self.debuglevel > 0:
                    print 'connect: (%s, %s)' % (self.host, self.port)
                self.sock.connect(sa)
            except socket.error, msg:
                if self.debuglevel > 0:
                    print 'connect fail:', (self.host, self.port), msg
                if self.sock:
                    self.sock.close()
                self.sock = None
                continue
            break
        if not self.sock:
            raise socket.error, msg
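# Illustrative sketch (not part of the original module): routing all requests
# through a SOCKS5 proxy via the module-level default. Host and port are
# invented, and this only applies when the optional `socks` module is importable.
def _example_set_default_proxy():
    if socks is not None:
        set_default_proxy(ProxyInfo(socks.PROXY_TYPE_SOCKS5, 'proxy.example.com', 1080))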
if hasattr(httplib, 'HTTPSConnection'):
    class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
        def __init__(self, host, port=None, key_file=None, cert_file=None,
                     strict=None, timeout=None, proxy_info=None):
            self.timeout = timeout
            self.proxy_info = proxy_info
            httplib.HTTPSConnection.__init__(self, host, port=port, key_file=key_file,
                                             cert_file=cert_file, strict=strict)

        def connect(self):
            """Connect to a host on a given (SSL) port."""
            if self.proxy_info and self.proxy_info.isgood():
                sock = socks.socksocket(socket.AF_INET, socket.SOCK_STREAM)
                sock.setproxy(*self.proxy_info.astuple())
            else:
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            if self.timeout is not None:
                sock.settimeout(self.timeout)
            sock.connect((self.host, self.port))
            ssl = socket.ssl(sock, self.key_file, self.cert_file)
            self.sock = httplib.FakeSocket(sock, ssl)

redirect_responses = set([300, 301, 302, 303, 307])
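# Illustrative sketch (not part of the original module): how a URL maps to the
# key used by ThreadsafeFileCache. The URL is invented.
def _example_cache_key():
    scheme, authority, request_uri, defrag_uri = urlnorm('http://example.com/logo.gif')
    # defrag_uri is the cache key; safename() flattens it into a file name of the
    # form 'example.com,logo.gif,<md5hex>' inside the cache directory.
    return safename(defrag_uri)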
class Http(object):
    """An HTTP client that handles all methods, caching, ETags, compression,
    HTTPS, Basic, Digest, WSSE, HMACDigest and GoogleLogin authentication,
    proxies (via socks) and redirects."""

    def __init__(self, cache=None, timeout=None, proxy_info=None):
        self.proxy_info = proxy_info or ProxyInfo.get_default_proxy()
        # Map of scheme:authority -> open connection.
        self.connections = {}
        # The cache is either a directory name or an object with get/set/delete.
        if cache and isinstance(cache, basestring):
            self.cache = ThreadsafeFileCache(cache)
        else:
            self.cache = cache
        self.credentials = Credentials()
        self.certificates = KeyCerts()
        # Authentication objects that have successfully authenticated so far.
        self.authorizations = []
        self.follow_redirects = True
        self.follow_all_redirects = False
        self.ignore_etag = False
        self.force_exception_to_status_code = False
        self.timeout = timeout

    def _auth_from_challenge(self, host, request_uri, headers, response, content):
        """A generator that creates Authentication objects that can be applied to requests."""
        challenges = _parse_www_authenticate(response, 'www-authenticate')
        for cred in self.credentials.iter(host):
            for scheme in AUTH_SCHEME_ORDER:
                if challenges.has_key(scheme):
                    yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers,
                                                      response, content, self)

    def add_credentials(self, name, password, domain=''):
        """Add a name and password to be used when a request requires authentication."""
        self.credentials.add(name, password, domain)

    def add_certificate(self, key, cert, domain):
        """Add a key and cert to be used for an SSL connection to the specified domain."""
        self.certificates.add(key, cert, domain)

    def clear_credentials(self):
        """Remove all the names and passwords used for authentication."""
        self.credentials.clear()
        self.authorizations = []

    def _conn_request(self, conn, request_uri, method, body, headers):
        for i in range(2):
            try:
                conn.request(method, request_uri, body, headers)
                response = conn.getresponse()
            except socket.gaierror:
                conn.close()
                raise ServerNotFoundError('Unable to find the server at %s' % conn.host)
            except (socket.error, httplib.HTTPException):
                if i == 0:
                    # Retry once on a closed or broken connection.
                    conn.close()
                    conn.connect()
                    continue
                else:
                    raise
            content = response.read()
            response = Response(response)
            if method != 'HEAD':
                content = _decompressContent(response, content)
            break
        return (response, content)

    def _request(self, conn, host, absolute_uri, request_uri, method, body, headers,
                 redirections, cachekey):
        """Do the actual request using the connection object and also
        follow one level of redirects if necessary."""
        auths = [(auth.depth(request_uri), auth) for auth in self.authorizations
                 if auth.inscope(host, request_uri)]
        auth = auths and sorted(auths)[0][1] or None
        if auth:
            auth.request(method, request_uri, headers, body)

        (response, content) = self._conn_request(conn, request_uri, method, body, headers)

        if auth:
            if auth.response(response, body):
                auth.request(method, request_uri, headers, body)
                (response, content) = self._conn_request(conn, request_uri, method, body, headers)
                response._stale_digest = 1

        if response.status == 401:
            for authorization in self._auth_from_challenge(host, request_uri, headers, response, content):
                authorization.request(method, request_uri, headers, body)
                (response, content) = self._conn_request(conn, request_uri, method, body, headers)
                if response.status != 401:
                    self.authorizations.append(authorization)
                    authorization.response(response, body)
                    break

        if self.follow_all_redirects or method in ('GET', 'HEAD') or response.status == 303:
            if self.follow_redirects and response.status in redirect_responses:
                # Pick out the location header and basically start from the beginning,
                # remembering first to strip the ETag header and decrement our 'depth'.
                if redirections:
                    if not response.has_key('location') and response.status != 300:
                        raise RedirectMissingLocation(
                            _('Redirected but the response is missing a Location: header.'),
                            response, content)
                    # Fix up the absolute URI if the location is relative.
                    if response.has_key('location'):
                        location = response['location']
                        (scheme, authority, path, query, fragment) = parse_uri(location)
                        if authority == None:
                            response['location'] = urlparse.urljoin(absolute_uri, location)
                    if response.status == 301 and method in ('GET', 'HEAD'):
                        response['-x-permanent-redirect-url'] = response['location']
                        if not response.has_key('content-location'):
                            response['content-location'] = absolute_uri
                        _updateCache(headers, response, content, self.cache, cachekey)
                        if cachekey:
                            response.cache_path = self.cache.getpath(cachekey)
                    if headers.has_key('if-none-match'):
                        del headers['if-none-match']
                    if headers.has_key('if-modified-since'):
                        del headers['if-modified-since']
                    if response.has_key('location'):
                        location = response['location']
                        old_response = copy.deepcopy(response)
                        if not old_response.has_key('content-location'):
                            old_response['content-location'] = absolute_uri
                        redirect_method = ((response.status == 303) and (method not in ('GET', 'HEAD'))) and 'GET' or method
                        (response, content) = self.request(location, redirect_method, body=body,
                                                           headers=headers,
                                                           redirections=redirections - 1)
                        response.previous = old_response
                else:
                    raise RedirectLimit(_('Redirected more times than redirection_limit allows.'),
                                        response, content)
        elif response.status in (200, 203) and method == 'GET':
            # Don't cache 206 partial content responses.
            if not response.has_key('content-location'):
                response['content-location'] = absolute_uri
            _updateCache(headers, response, content, self.cache, cachekey)
            if cachekey:
                response.cache_path = self.cache.getpath(cachekey)

        return (response, content)

    def request(self, uri, method='GET', body=None, headers=None,
                redirections=DEFAULT_MAX_REDIRECTS, connection_type=None, decode=True):
        """Perform a single HTTP request and return (response, content).

        'uri' is the URI of the HTTP resource and may begin with either 'http'
        or 'https'. 'redirections' bounds how many redirects are followed, and
        'decode' controls whether the body is decoded using the charset from
        the Content-Type header.
        """
        try:
            if headers is None:
                headers = {}
            else:
                headers = _normalize_headers(headers)
            if not headers.has_key('user-agent'):
                headers['user-agent'] = 'Python-httplib2/%s' % __version__

            uri = iri2uri(uri)
            (scheme, authority, request_uri, defrag_uri) = urlnorm(uri)

            domain_port = authority.split(':')[0:2]
            if len(domain_port) == 2 and domain_port[1] == '443' and scheme == 'http':
                scheme = 'https'
                authority = domain_port[0]

            if scheme == 'http' and self.proxy_info and self.proxy_info.isgood() \
                    and self.proxy_info.proxy_type == socks.PROXY_TYPE_HTTP:
                # A plain HTTP proxy wants the absolute URI in the request line.
                if not request_uri:
                    slash = '/'
                else:
                    slash = ''
                request_uri = uri + slash
                user = self.proxy_info.proxy_user
                password = self.proxy_info.proxy_pass
                if user and password:
                    from urllib import unquote
                    user_pass = '%s:%s' % (unquote(user), unquote(password))
                    creds = base64.b64encode(user_pass).strip()
                    headers['Proxy-Authorization'] = 'Basic ' + creds
                newhost = self.proxy_info.proxy_host
                newport = self.proxy_info.proxy_port
                use_port = newport != httplib.HTTP_PORT
                if use_port:
                    authority = newhost + ':' + str(newport)
                else:
                    authority = newhost

            conn_key = scheme + ':' + authority
            if conn_key in self.connections:
                conn = self.connections[conn_key]
            else:
                if not connection_type:
                    connection_type = (scheme == 'https') and HTTPSConnectionWithTimeout or HTTPConnectionWithTimeout
                certs = list(self.certificates.iter(authority))
                if scheme == 'https' and certs:
                    conn = self.connections[conn_key] = connection_type(
                        authority, key_file=certs[0][0], cert_file=certs[0][1],
                        timeout=self.timeout, proxy_info=self.proxy_info)
                else:
                    conn = self.connections[conn_key] = connection_type(
                        authority, timeout=self.timeout, proxy_info=self.proxy_info)
                conn.set_debuglevel(debuglevel)

            if method in ('GET', 'HEAD') and 'range' not in headers:
                headers['accept-encoding'] = 'compress, gzip'

            info = email.Message.Message()
            cached_value = None
            if self.cache:
                cachekey = defrag_uri
                cached_value = self.cache.get(cachekey)
                if cached_value:
                    info = email.message_from_string(cached_value)
                    cache_path = self.cache.getpath(cachekey)
                    try:
                        content = cached_value.split('\r\n\r\n', 1)[1]
                    except IndexError:
                        self.cache.delete(cachekey)
                        cachekey = None
                        cached_value = None
                        cache_path = None
            else:
                cachekey = None

            if method in ('PUT',) and self.cache and info.has_key('etag') and not self.ignore_etag and 'if-match' not in headers:
                headers['if-match'] = info['etag']

            if method not in ('GET', 'HEAD') and self.cache and cachekey:
                # RFC 2616 Section 13.10: invalidate the cache entry on writes.
                self.cache.delete(cachekey)

            if cached_value and method in ('GET', 'HEAD') and self.cache and 'range' not in headers:
                if info.has_key('-x-permanent-redirect-url'):
                    # Should cached permanent redirects be counted in our redirection count? For now, yes.
                    (response, new_content) = self.request(info['-x-permanent-redirect-url'], 'GET',
                                                           headers=headers,
                                                           redirections=redirections - 1)
                    response.previous = Response(info)
                    response.previous.fromcache = True
                else:
                    # Is the cached entry fresh or stale, and has the client
                    # requested a non-cached response?
                    entry_disposition = _entry_disposition(info, headers)
                    if entry_disposition == 'FRESH':
                        if not cached_value:
                            info['status'] = '504'
                            content = ''
                        response = Response(info)
                        if cached_value:
                            response.fromcache = True
                            response.cache_path = cache_path
                        return (response, content)
                    if entry_disposition == 'STALE':
                        if info.has_key('etag') and not self.ignore_etag and 'if-none-match' not in headers:
                            headers['if-none-match'] = info['etag']
                        if info.has_key('last-modified') and 'last-modified' not in headers:
                            headers['if-modified-since'] = info['last-modified']
                    elif entry_disposition == 'TRANSPARENT':
                        pass

                    (response, new_content) = self._request(conn, authority, uri, request_uri,
                                                            method, body, headers, redirections, cachekey)

                    if response.status == 304 and method == 'GET':
                        # Rewrite the cache entry with the new end-to-end headers.
                        for key in _get_end2end_headers(response):
                            info[key] = response[key]
                        merged_response = Response(info)
                        if hasattr(response, '_stale_digest'):
                            merged_response._stale_digest = response._stale_digest
                        _updateCache(headers, merged_response, content, self.cache, cachekey)
                        response = merged_response
                        response.status = 200
                        response.fromcache = True
                        response.cache_path = cache_path
                    elif response.status == 200:
                        content = new_content
                    else:
                        self.cache.delete(cachekey)
                        content = new_content
            else:
                (response, content) = self._request(conn, authority, uri, request_uri,
                                                    method, body, headers, redirections, cachekey)
        except Exception, e:
            if self.force_exception_to_status_code:
                if isinstance(e, HttpLib2ErrorWithResponse):
                    response = e.response
                    content = e.content
                    response.status = 500
                    response.reason = str(e)
                elif isinstance(e, socket.timeout):
                    content = 'Request Timeout'
                    response = Response({'content-type': 'text/plain', 'status': '408',
                                         'content-length': len(content)})
                    response.reason = 'Request Timeout'
                else:
                    content = str(e)
                    response = Response({'content-type': 'text/plain', 'status': '400',
                                         'content-length': len(content)})
                    response.reason = 'Bad Request'
            else:
                raise

        if decode:
            # If the Content-Type header names a charset, decode the body with it.
            ctype_data = response.get('content-type', None)
            if ctype_data is not None:
                while ctype_data:
                    split = ctype_data.split(';', 1)
                    if len(split) == 1:
                        split = [split[0], '']
                    (param, ctype_data) = split
                    param = param.strip()
                    if '=' in param:
                        (key, val) = param.split('=', 1)
                        if key.lower() == 'charset':
                            charset = val
                            try:
                                content = content.decode(charset)
                            except:
                                pass
                            ctype_data = None

        return (response, content)

HTTP = Http
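# Illustrative sketch (not part of the original module): typical use of Http with
# an on-disk cache and Basic credentials. The URL, cache directory and
# credentials are invented.
def _example_http_usage():
    h = Http('.cache', timeout=20)
    h.add_credentials('alice', 'secret')
    resp, content = h.request('http://example.com/protected', 'GET')
    # resp is a Response (a dict of headers plus .status/.fromcache);
    # content is the, possibly charset-decoded, body.
    return resp.status, len(content)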
class Response(dict):
    """An object more like email.Message than httplib.HTTPResponse."""

    # Is this response from our local cache?
    fromcache = False
    # HTTP protocol version used by server. 10 for HTTP/1.0, 11 for HTTP/1.1.
    version = 11
    # Status code returned by server.
    status = 200
    # Reason phrase returned by server.
    reason = 'Ok'
    previous = None

    def __init__(self, info):
        # info is either an email.Message, an httplib.HTTPResponse, or a dict.
        if isinstance(info, httplib.HTTPResponse):
            for key, value in info.getheaders():
                self[key] = value
            self.status = info.status
            self['status'] = str(self.status)
            self.reason = info.reason
            self.version = info.version
        elif isinstance(info, email.Message.Message):
            for key, value in info.items():
                self[key] = value
            self.status = int(self['status'])
        else:
            for key, value in info.iteritems():
                self[key] = value
            self.status = int(self.get('status', self.status))

    def ok(self):
        return self.status // 100 == 2
    ok = property(ok)

    def __getattr__(self, name):
        if name == 'dict':
            return self
        else:
            raise AttributeError, name
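# Illustrative sketch (not part of the original module): Response is a dict of
# lowercased headers with status/reason attributes layered on top.
def _example_response_object():
    r = Response({'status': '200', 'content-type': 'text/plain'})
    # r.status == 200, r.ok is True, r['content-type'] == 'text/plain'
    return r.ok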
def cached_content(cache_path):
    """Load just the body bytes from a content file written by ThreadsafeFileCache."""
    log.info('httplib2.cached_content(%s)' % cache_path)
    try:
        with open(cache_path, 'rb') as f:
            cached_value = f.read()
        log.info('read %d bytes of data: %r', len(cached_value), cached_value[:40])
    except Exception:
        log.error('error loading cached data from %s', cache_path)
        return None

    try:
        data = cached_value.split('\r\n\r\n', 1)[1]
        log.info('returning %d bytes of data', len(data))
        return data
    except IndexError:
        log.error('index error, returning none')
        return None

def main():
    import wx
    import PIL
    import cStringIO
    import gui.toolbox as gui

    def show(data):
        PIL.Image.open(cStringIO.StringIO(data)).Show()

    a = wx.PySimpleApp()
    f = wx.Frame(None)
    f.Show()

    url = 'http://www.google.com/intl/en_ALL/images/logo.gif'
    (resp, data) = Http('c:\\test_cache').request(url)
    print resp
    if 200 <= resp.status < 300:
        show(data)
    a.MainLoop()

if __name__ == '__main__':
    main()